In [9]:
    
%%latex
\begin{aligned}
& \text{Hypothesis:} \\
& \quad h_\theta(x) = \theta_0 + \theta_1 x \\
\\
& \text{Parameters:} \\
& \quad \theta_0, \theta_1 \\
\\
& \text{Cost function (error):} \\
& \quad J(\theta_0, \theta_1) = \frac{1}{2m} \sum_{i = 1}^{m} \left( h_\theta(x^{(i)}) - y^{(i)} \right)^{2} \\
\\
& \text{Goal:} \\
& \quad \underset{\theta_0, \theta_1}{\min}\; J(\theta_0, \theta_1)
\end{aligned}
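
Evaluating the cost J above is a few lines of NumPy. A minimal sketch, assuming a 1-D feature array; the function name `compute_cost` and the sample data are illustrative, not part of the original notes.

In [ ]:

import numpy as np

def compute_cost(theta0, theta1, x, y):
    """J(theta0, theta1): mean squared error over the m training examples."""
    m = len(y)
    predictions = theta0 + theta1 * x               # h_theta(x^(i)) for every example
    return np.sum((predictions - y) ** 2) / (2 * m)

# Sanity check (illustrative data): a perfect fit gives zero cost
x = np.array([1.0, 2.0, 3.0])
y = np.array([1.0, 2.0, 3.0])
compute_cost(0.0, 1.0, x, y)                        # -> 0.0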
    
    
In [11]:
    
%%latex
\begin{aligned}
    \alpha \rightarrow \text{learning rate (step size)}
\end{aligned}
    
    
repeat until convergence {
    theta_j := theta_j - alpha * (d/d theta_j) J(theta_0, theta_1)
    (simultaneously update for j = 0 and j = 1)
}
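
Putting the pieces together, a hedged NumPy sketch of the loop above; `gradient_descent`, the convergence tolerance, and the zero initialization are assumptions. The gradients are the standard partial derivatives of J for this hypothesis: (1/m) Σ (h − y) for theta_0 and (1/m) Σ (h − y) · x for theta_1.

In [ ]:

import numpy as np

def gradient_descent(x, y, alpha=0.01, tol=1e-9, max_iters=10_000):
    """Batch gradient descent for h_theta(x) = theta0 + theta1 * x."""
    m = len(y)
    theta0, theta1 = 0.0, 0.0                       # illustrative starting point
    for _ in range(max_iters):
        errors = (theta0 + theta1 * x) - y          # h_theta(x^(i)) - y^(i)
        grad0 = errors.sum() / m                    # dJ/dtheta0
        grad1 = (errors * x).sum() / m              # dJ/dtheta1
        # Simultaneous update: both gradients are computed before either step
        new_theta0 = theta0 - alpha * grad0
        new_theta1 = theta1 - alpha * grad1
        if max(abs(new_theta0 - theta0), abs(new_theta1 - theta1)) < tol:
            break                                   # convergence: steps are tiny
        theta0, theta1 = new_theta0, new_theta1
    return theta0, theta1

# Illustrative usage: recover y = 2x + 1 from noiseless data
x = np.array([0.0, 1.0, 2.0, 3.0, 4.0])
gradient_descent(x, 2 * x + 1, alpha=0.05)          # -> approximately (1.0, 2.0)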